# Quick-start runbook: bring up a Spark standalone cluster with Docker Compose
# and attach an interactive spark-shell to it.

# 1. Start the cluster in detached mode (services defined in docker-compose.yml).
docker compose up -d

# 2. Open a bash shell inside worker 1 and move into the /spark/bin folder.
#    From the local prompt: docker exec -it <container name> /bin/bash
#    Then inside the container: cd /spark/bin

# 3. Start the Spark shell, pointing it at the standalone master service.
#    "spark-master" is resolved via the compose network; 7077 is the default
#    standalone-master RPC port.
./spark-shell --master spark://spark-master:7077

# 4. Check the cluster status in a browser (this is a URL, not a shell command):
#    http://localhost:14040/
#    NOTE(review): 14040 is presumably a host-mapped Spark web UI port — verify
#    the port mapping in docker-compose.yml (Spark defaults: master UI 8080,
#    application UI 4040).